fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "cargo {}.{}.{}",
self.major, self.minor, self.patch)?;
- match self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
- Some(channel) => {
- if channel != "stable" {
- write!(f, "-{}", channel)?;
- let empty = String::from("");
- write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?;
- }
- },
- None => (),
+ if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
+ if channel != "stable" {
+ write!(f, "-{}", channel)?;
+ let empty = String::from("");
+ write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?;
+ }
};
if let Some(ref cfg) = self.cfg_info {
let mut paths = Vec::new();
let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty());
- loop {
- let mut file = match deps.next() {
- Some(s) => s.to_string(),
- None => break,
- };
- while file.ends_with("\\") {
+ while let Some(s) = deps.next() {
+ let mut file = s.to_string();
+ while file.ends_with('\\') {
file.pop();
file.push(' ');
file.push_str(deps.next().chain_error(|| {
- internal(format!("malformed dep-info format, trailing \\"))
+ internal("malformed dep-info format, trailing \\".to_string())
})?);
}
paths.push(cwd.join(&file));
fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
if let Some(paths) = parse_dep_info(dep_info)? {
- Ok(mtime_if_fresh(&dep_info, paths.iter()))
+ Ok(mtime_if_fresh(dep_info, paths.iter()))
} else {
Ok(None)
}
// the target triple as a Path and then just use the file stem as the
// component for the directory name.
if let Some(triple) = triple {
- path.push(Path::new(triple).file_stem().ok_or(human(format!("target was empty")))?);
+ path.push(Path::new(triple).file_stem().ok_or(human("target was empty".to_string()))?);
}
path.push(dest);
Layout::at(ws.config(), path)
let pkgid = unit.pkg.package_id();
if !unit.target.is_lib() { continue }
if unit.profile.doc { continue }
- if cx.compilation.libraries.contains_key(&pkgid) {
+ if cx.compilation.libraries.contains_key(pkgid) {
continue
}
}
}
- if let Some(feats) = cx.resolve.features(&unit.pkg.package_id()) {
+ if let Some(feats) = cx.resolve.features(unit.pkg.package_id()) {
cx.compilation.cfgs.entry(unit.pkg.package_id().clone())
- .or_insert(HashSet::new())
+ .or_insert_with(HashSet::new)
.extend(feats.iter().map(|feat| format!("feature=\"{}\"", feat)));
}
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
cx.compilation.cfgs.entry(pkg.clone())
- .or_insert(HashSet::new())
+ .or_insert_with(HashSet::new)
.extend(output.cfgs.iter().cloned());
for dir in output.library_paths.iter() {
},
&mut |line| {
// stderr from rustc can have a mix of JSON and non-JSON output
- if line.starts_with("{") {
+ if line.starts_with('{') {
// Handle JSON lines
let compiler_message = json::Json::from_str(line).map_err(|_| {
internal(&format!("compiler produced invalid json: `{}`", line))
fn add_deps_for_unit<'a, 'b>(deps: &mut HashSet<PathBuf>, context: &mut Context<'a, 'b>,
unit: &Unit<'a>, visited: &mut HashSet<Unit<'a>>) -> CargoResult<()>
{
- if !visited.insert(unit.clone()) {
+ if !visited.insert(*unit) {
return Ok(());
}
// dep-info generation failed, so delete output file. This will usually
// cause the build system to always rerun the build rule, which is correct
// if inefficient.
- match fs::remove_file(output_path) {
- Err(err) => {
- if err.kind() != ErrorKind::NotFound {
- return Err(err.into());
- }
+ if let Err(err) = fs::remove_file(output_path) {
+ if err.kind() != ErrorKind::NotFound {
+ return Err(err.into());
}
- _ => ()
}
}
}
let mut errors = Vec::new();
for &(ref pkg, _, ref exe) in &compilation.tests {
- let to_display = match util::without_prefix(exe, &cwd) {
+ let to_display = match util::without_prefix(exe, cwd) {
Some(path) => path,
None => &**exe,
};
p.arg("--test-args").arg(arg);
}
- if let Some(cfgs) = compilation.cfgs.get(&package.package_id()) {
+ if let Some(cfgs) = compilation.cfgs.get(package.package_id()) {
for cfg in cfgs.iter() {
p.arg("--cfg").arg(cfg);
}
emit_package(root.as_table().unwrap(), &mut out);
}
- let deps = e.toml.get(&"package".to_string()).unwrap().as_slice().unwrap();
+ let deps = e.toml[&"package".to_string()].as_slice().unwrap();
for dep in deps.iter() {
let dep = dep.as_table().unwrap();
emit_package(dep, &mut out);
}
- match e.toml.get(&"metadata".to_string()) {
- Some(metadata) => {
- out.push_str("[metadata]\n");
- out.push_str(&metadata.to_string());
- }
- None => {}
+ if let Some(metadata) = e.toml.get(&"metadata".to_string()) {
+ out.push_str("[metadata]\n");
+ out.push_str(&metadata.to_string());
}
// If the lockfile contents haven't changed so don't rewrite it. This is
out.push_str(&format!("source = {}\n", lookup(dep, "source")));
}
- if let Some(ref s) = dep.get("dependencies") {
- let slice = Value::as_slice(*s).unwrap();
+ if let Some(s) = dep.get("dependencies") {
+ let slice = Value::as_slice(s).unwrap();
if !slice.is_empty() {
out.push_str("dependencies = [\n");
let (mut registry, reg_id) = registry(opts.config,
opts.token.clone(),
opts.index.clone())?;
- verify_dependencies(&pkg, ®_id)?;
+ verify_dependencies(pkg, ®_id)?;
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
// Upload said tarball to the specified destination
opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
- transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run)?;
+ transmit(opts.config, pkg, tarball.file(), &mut registry, opts.dry_run)?;
Ok(())
}
Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
None => None,
};
- match *license_file {
- Some(ref file) => {
- if fs::metadata(&pkg.root().join(file)).is_err() {
- bail!("the license file `{}` does not exist", file)
- }
+ if let Some(ref file) = *license_file {
+ if fs::metadata(&pkg.root().join(file)).is_err() {
+ bail!("the license file `{}` does not exist", file)
}
- None => {}
}
// Do not upload if performing a dry run
/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
/// via environment variables are picked up by libcurl.
fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
- match config.get_string("http.proxy")? {
- Some(s) => return Ok(Some(s.val)),
- None => {}
+ if let Some(s) = config.get_string("http.proxy")? {
+ return Ok(Some(s.val))
}
- match git2::Config::open_default() {
- Ok(cfg) => {
- match cfg.get_str("http.proxy") {
- Ok(s) => return Ok(Some(s.to_string())),
- Err(..) => {}
- }
+ if let Ok(cfg) = git2::Config::open_default() {
+ if let Ok(s) = cfg.get_str("http.proxy") {
+ return Ok(Some(s.to_string()))
}
- Err(..) => {}
}
Ok(None)
}
}
pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
- match config.get_i64("http.timeout")? {
- Some(s) => return Ok(Some(s.val)),
- None => {}
+ if let Some(s) = config.get_i64("http.timeout")? {
+ return Ok(Some(s.val))
}
Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
}
let RegistryConfig { index, token: _ } = registry_configuration(config)?;
let mut map = HashMap::new();
let p = config.cwd().to_path_buf();
- match index {
- Some(index) => {
- map.insert("index".to_string(), ConfigValue::String(index, p.clone()));
- }
- None => {}
+ if let Some(index) = index {
+ map.insert("index".to_string(), ConfigValue::String(index, p.clone()));
}
map.insert("token".to_string(), ConfigValue::String(token, p));
let (mut registry, _) = registry(config, opts.token.clone(),
opts.index.clone())?;
- match opts.to_add {
- Some(ref v) => {
- let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
- config.shell().status("Owner", format!("adding {:?} to crate {}",
- v, name))?;
- registry.add_owners(&name, &v).map_err(|e| {
- human(format!("failed to add owners to crate {}: {}", name, e))
- })?;
- }
- None => {}
+ if let Some(ref v) = opts.to_add {
+ let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+ config.shell().status("Owner", format!("adding {:?} to crate {}",
+ v, name))?;
+ registry.add_owners(&name, &v).map_err(|e| {
+ human(format!("failed to add owners to crate {}: {}", name, e))
+ })?;
}
- match opts.to_remove {
- Some(ref v) => {
- let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
- config.shell().status("Owner", format!("removing {:?} from crate {}",
- v, name))?;
- registry.remove_owners(&name, &v).map_err(|e| {
- human(format!("failed to remove owners from crate {}: {}", name, e))
- })?;
- }
- None => {}
+ if let Some(ref v) = opts.to_remove {
+ let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+ config.shell().status("Owner", format!("removing {:?} from crate {}",
+ v, name))?;
+ registry.remove_owners(&name, &v).map_err(|e| {
+ human(format!("failed to remove owners from crate {}: {}", name, e))
+ })?;
}
if opts.list {
let resolved_with_overrides =
ops::resolve_with_previous(&mut registry, ws,
method, Some(&resolve), None,
- &specs)?;
+ specs)?;
for &(ref replace_spec, _) in ws.root_replace() {
if !resolved_with_overrides.replacements().keys().any(|r| replace_spec.matches(r)) {
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
- if s.starts_with(".") {
+ if s.starts_with('.') {
continue
}
}
trace!("updating git source `{:?}`", self.remote);
- let repo = self.remote.checkout(&db_path, &self.config)?;
+ let repo = self.remote.checkout(&db_path, self.config)?;
let rev = repo.rev_for(&self.reference)?;
(repo, rev)
} else {
// in scope so the destructors here won't tamper with too much.
// Checkout is immutable, so we don't need to protect it with a lock once
// it is created.
- repo.copy_to(actual_rev.clone(), &checkout_path, &self.config)?;
+ repo.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
let path_source = PathSource::new_recursive(&checkout_path,
pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult<GitDatabase> {
let repo = match git2::Repository::open(into) {
Ok(repo) => {
- self.fetch_into(&repo, &cargo_config).chain_error(|| {
+ self.fetch_into(&repo, cargo_config).chain_error(|| {
human(format!("failed to fetch into {}", into.display()))
})?;
repo
}
Err(..) => {
- self.clone_into(into, &cargo_config).chain_error(|| {
+ self.clone_into(into, cargo_config).chain_error(|| {
human(format!("failed to clone into: {}", into.display()))
})?
}
// Create a local anonymous remote in the repository to fetch the url
let url = self.url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
- fetch(dst, &url, refspec, &cargo_config)
+ fetch(dst, &url, refspec, cargo_config)
}
fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
}
fs::create_dir_all(dst)?;
let repo = git2::Repository::init_bare(dst)?;
- fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config)?;
+ fetch(&repo, &url, "refs/heads/*:refs/heads/*", cargo_config)?;
Ok(repo)
}
}
Ok(repo) => {
let checkout = GitCheckout::new(dest, self, rev, repo);
if !checkout.is_fresh() {
- checkout.fetch(&cargo_config)?;
+ checkout.fetch(cargo_config)?;
checkout.reset()?;
assert!(checkout.is_fresh());
}
}
Err(..) => GitCheckout::clone_into(dest, self, rev)?,
};
- checkout.update_submodules(&cargo_config).chain_error(|| {
+ checkout.update_submodules(cargo_config).chain_error(|| {
internal("failed to update submodules")
})?;
Ok(checkout)
let url = self.database.path.to_url()?;
let url = url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
- fetch(&self.repo, &url, refspec, &cargo_config)?;
+ fetch(&self.repo, &url, refspec, cargo_config)?;
Ok(())
}
}
fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> {
- return update_submodules(&self.repo, &cargo_config);
+ return update_submodules(&self.repo, cargo_config);
fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> {
info!("update submodules for: {:?}", repo.workdir().unwrap());
// Fetch data from origin and reset to the head commit
let refspec = "refs/heads/*:refs/heads/*";
- fetch(&repo, url, refspec, &cargo_config).chain_error(|| {
+ fetch(&repo, url, refspec, cargo_config).chain_error(|| {
internal(format!("failed to fetch submodule `{}` from {}",
child.name().unwrap_or(""), url))
})?;
let obj = repo.find_object(head, None)?;
repo.reset(&obj, git2::ResetType::Hard, None)?;
- update_submodules(&repo, &cargo_config)?;
+ update_submodules(&repo, cargo_config)?;
}
Ok(())
}
let username = username.unwrap();
debug_assert!(!ssh_username_requested);
ssh_agent_attempts.push(username.to_string());
- return git2::Cred::ssh_key_from_agent(&username)
+ return git2::Cred::ssh_key_from_agent(username)
}
// Sometimes libgit2 will ask for a username/password in plaintext. This
res.chain_error(|| {
let mut msg = "failed to authenticate when downloading \
repository".to_string();
- if ssh_agent_attempts.len() > 0 {
+ if !ssh_agent_attempts.is_empty() {
let names = ssh_agent_attempts.iter()
.map(|s| format!("`{}`", s))
.collect::<Vec<_>>()
cb.credentials(f);
// Create a local anonymous remote in the repository to fetch the url
- let mut remote = repo.remote_anonymous(&url)?;
+ let mut remote = repo.remote_anonymous(url)?;
let mut opts = git2::FetchOptions::new();
opts.remote_callbacks(cb)
.download_tags(git2::AutotagOption::All);
target.display()))
})?;
let refspec = "refs/heads/*:refs/heads/*";
- fetch(&repo, &url, refspec, &config).chain_error(||{
+ fetch(&repo, url, refspec, &config).chain_error(||{
human(format!("failed to fecth `{}`", url))
})?;
let reference = "HEAD";
.collect::<Result<Vec<_>, _>>()?;
let mut filter = |p: &Path| {
- let relative_path = util::without_prefix(p, &root).unwrap();
- include.iter().any(|p| p.matches_path(&relative_path)) || {
+ let relative_path = util::without_prefix(p, root).unwrap();
+ include.iter().any(|p| p.matches_path(relative_path)) || {
include.is_empty() &&
- !exclude.iter().any(|p| p.matches_path(&relative_path))
+ !exclude.iter().any(|p| p.matches_path(relative_path))
}
};
let index_files = index.iter().map(|entry| {
use libgit2_sys::GIT_FILEMODE_COMMIT;
let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
- (join(&root, &entry.path), Some(is_dir))
+ (join(root, &entry.path), Some(is_dir))
});
let mut opts = git2::StatusOptions::new();
opts.include_untracked(true);
- if let Some(suffix) = util::without_prefix(pkg_path, &root) {
+ if let Some(suffix) = util::without_prefix(pkg_path, root) {
opts.pathspec(suffix);
}
let statuses = repo.statuses(Some(&mut opts))?;
let untracked = statuses.iter().filter_map(|entry| {
match entry.status() {
- git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)),
+ git2::STATUS_WT_NEW => Some((join(root, entry.path_bytes()), None)),
_ => None
}
});
let mut subpackages_found = Vec::new();
- 'outer: for (file_path, is_dir) in index_files.chain(untracked) {
+ for (file_path, is_dir) in index_files.chain(untracked) {
let file_path = file_path?;
// Filter out files blatantly outside this package. This is helped a
if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display());
- let rel = util::without_prefix(&file_path, &root).unwrap();
+ let rel = util::without_prefix(&file_path, root).unwrap();
let rel = rel.to_str().chain_error(|| {
human(format!("invalid utf-8 filename: {}", rel.display()))
})?;
/// specified.
pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> {
if self.cache.contains_key(name) {
- return Ok(self.cache.get(name).unwrap());
+ return Ok(&self.cache[name]);
}
let summaries = self.load_summaries(name)?;
let summaries = summaries.into_iter().filter(|summary| {
summary.0.package_id().name() == name
}).collect();
self.cache.insert(name.to_string(), summaries);
- Ok(self.cache.get(name).unwrap())
+ Ok(&self.cache[name])
}
fn load_summaries(&mut self, name: &str) -> CargoResult<Vec<(Summary, bool)>> {
let mut contents = String::new();
f.read_to_string(&mut contents)?;
let ret: CargoResult<Vec<(Summary, bool)>>;
- ret = contents.lines().filter(|l| l.trim().len() > 0)
+ ret = contents.lines().filter(|l| !l.trim().is_empty())
.map(|l| self.parse_registry_package(l))
.collect();
ret.chain_error(|| {
};
debug!("attempting github fast path for {}",
self.source_id.url());
- if github_up_to_date(handle, &self.source_id.url(), &oid) {
+ if github_up_to_date(handle, self.source_id.url(), &oid) {
return Ok(())
}
debug!("fast path failed, falling back to a git fetch");
let url = self.source_id.url().to_string();
let refspec = "refs/heads/*:refs/remotes/origin/*";
- git::fetch(&repo, &url, refspec, &self.config).chain_error(|| {
+ git::fetch(&repo, &url, refspec, self.config).chain_error(|| {
human(format!("failed to fetch `{}`", url))
})?;
}
/// Delegates fingerprint computation for `id` to the wrapped source.
fn fingerprint(&self, id: &Package) -> CargoResult<String> {
    self.inner.fingerprint(id)
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
}
pub fn get_path(&self, key: &str) -> CargoResult<Option<Value<PathBuf>>> {
- if let Some(val) = self.get_string(&key)? {
+ if let Some(val) = self.get_string(key)? {
let is_path = val.val.contains('/') ||
(cfg!(windows) && val.val.contains('\\'));
let path = if is_path {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let table = cargo_toml::parse(&contents,
- &path,
+ path,
self).chain_error(|| {
human(format!("could not parse TOML configuration in `{}`",
path.display()))
})?;
let toml = toml::Value::Table(table);
- let value = CV::from_toml(&path, toml).chain_error(|| {
+ let value = CV::from_toml(path, toml).chain_error(|| {
human(format!("failed to load TOML configuration from `{}`",
path.display()))
})?;
for dep in dependencies {
assert!(my_dependencies.insert(dep.clone()));
let rev = self.reverse_dep_map.entry(dep.clone())
- .or_insert(HashSet::new());
+ .or_insert_with(HashSet::new);
assert!(rev.insert(key.clone()));
}
&mut slot.insert((my_dependencies, value)).1
if let Some(out) = output {
match str::from_utf8(&out.stdout) {
- Ok(s) if s.trim().len() > 0 => {
+ Ok(s) if !s.trim().is_empty() => {
desc.push_str("\n--- stdout\n");
desc.push_str(s);
}
Ok(..) | Err(..) => {}
}
match str::from_utf8(&out.stderr) {
- Ok(s) if s.trim().len() > 0 => {
+ Ok(s) if !s.trim().is_empty() => {
desc.push_str("\n--- stderr\n");
desc.push_str(s);
}
}
Ok(absolute_path)
},
- None => find_project_manifest(&cwd, "Cargo.toml"),
+ None => find_project_manifest(cwd, "Cargo.toml"),
}
}
where F: FnOnce() -> Result<T, Error>
{
if self.borrow().is_none() {
- if let Err(_) = self.fill(init()?) {
+ if self.fill(init()?).is_err() {
unreachable!();
}
}
if let Some(param) = h.param(0) {
let txt = param.value().as_string().unwrap_or("").to_owned();
let rendered = format!("{}", toml::Value::String(txt));
- try!(rc.writer.write(rendered.into_bytes().as_ref()));
+ try!(rc.writer.write_all(rendered.into_bytes().as_ref()));
}
Ok(())
}
rc: &mut RenderContext) -> Result<(), RenderError> {
if let Some(param) = h.param(0) {
let rendered = html_escape(param.value().as_string().unwrap_or(""));
- try!(rc.writer.write(rendered.into_bytes().as_ref()));
+ try!(rc.writer.write_all(rendered.into_bytes().as_ref()));
}
Ok(())
}
subdir: Option<&'a str>) -> CargoResult<TemplateType> {
match (repo, subdir) {
(Some(repo_str), _) => {
- if let Ok(repo_url) = Url::parse(&repo_str) {
+ if let Ok(repo_url) = Url::parse(repo_str) {
let supported_schemes = ["git", "file", "http", "https", "ssh"];
if supported_schemes.contains(&repo_url.scheme()) {
Ok(TemplateType::GitRepo(repo_url.into_string()))
}
}
/// Appends to `files` every `*.rs` entry directly inside `root`,
/// skipping editor "dotfiles". If `root` cannot be read (doesn't exist,
/// permissions, ...), nothing is added and no error is reported.
fn try_add_files(files: &mut Vec<PathBuf>, root: PathBuf) {
    if let Ok(new) = fs::read_dir(&root) {
        files.extend(new.filter_map(|dir| {
            // Skip directory entries that themselves failed to read.
            dir.map(|d| d.path()).ok()
        }).filter(|f| {
            f.extension().and_then(|s| s.to_str()) == Some("rs")
        }).filter(|f| {
            // Some unix editors may create "dotfiles" next to original
            // source files while they're being edited, but these files are
            // rarely actually valid Rust source files and sometimes aren't
            // even valid UTF-8. Here we just ignore all of them and require
            // that they are explicitly specified in Cargo.toml if desired.
            f.file_name().and_then(|s| s.to_str()).map(|s| {
                !s.starts_with('.')
            }).unwrap_or(true)
        }))
    }
}
pub fn to_manifest(contents: &str,
pub fn parse(toml: &str,
file: &Path,
config: &Config) -> CargoResult<toml::Table> {
- let mut first_parser = toml::Parser::new(&toml);
+ let mut first_parser = toml::Parser::new(toml);
if let Some(toml) = first_parser.parse() {
return Ok(toml);
}
return Ok(toml)
}
- let mut error_str = format!("could not parse input as TOML\n");
+ let mut error_str = "could not parse input as TOML\n".to_string();
for error in first_parser.errors.iter() {
let (loline, locol) = first_parser.to_linecol(error.lo);
let (hiline, hicol) = first_parser.to_linecol(error.hi);
Some(
TomlTarget {
name: lib.name.clone().or(Some(project.name.clone())),
- path: lib.path.clone().or(
- layout.lib.as_ref().map(|p| PathValue::Path(p.clone()))
+ path: lib.path.clone().or_else(
+ || layout.lib.as_ref().map(|p| PathValue::Path(p.clone()))
),
..lib.clone()
}
config: config,
warnings: &mut warnings,
platform: None,
- layout: &layout,
+ layout: layout,
};
fn process_dependencies(
-> CargoResult<()>
{
let dependencies = match new_deps {
- Some(ref dependencies) => dependencies,
+ Some(dependencies) => dependencies,
None => return Ok(())
};
for (n, v) in dependencies.iter() {
let exclude = project.exclude.clone().unwrap_or(Vec::new());
let include = project.include.clone().unwrap_or(Vec::new());
- let summary = Summary::new(pkgid, deps, self.features.clone() .unwrap_or(HashMap::new()))?;
+ let summary = Summary::new(pkgid, deps, self.features.clone() .unwrap_or_else(HashMap::new))?;
let metadata = ManifestMetadata {
description: project.description.clone(),
homepage: project.homepage.clone(),
// If there is a build.rs file next to the Cargo.toml, assume it is
// a build script
Ok(ref e) if e.is_file() => Some(build_rs.into()),
- Ok(_) => None,
- Err(_) => None,
+ Ok(_) | Err(_) => None,
}
}
}
}
let lib_target = |dst: &mut Vec<Target>, l: &TomlLibTarget| {
- let path = l.path.clone().unwrap_or(
- PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name())))
+ let path = l.path.clone().unwrap_or_else(
+ || PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name())))
);
let crate_types = match l.crate_type.clone() {
Some(kinds) => kinds.iter().map(|s| LibKind::from_str(s)).collect(),
for bin in bins.iter() {
let path = bin.path.clone().unwrap_or_else(|| {
let default_bin_path = PathValue::Path(default(bin));
- match package_root.join(default_bin_path.to_path()).exists() {
- true => default_bin_path, // inferred from bin's name
- false => PathValue::Path(Path::new("src").join("main.rs"))
+ if package_root.join(default_bin_path.to_path()).exists() {
+ default_bin_path // inferred from bin's name
+ } else {
+ PathValue::Path(Path::new("src").join("main.rs"))
}
});
let mut target = Target::bin_target(&bin.name(), package_root.join(path.to_path()),
fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) {
let p = project("foo");
let expected_path = manifest_path_argument
- .split("/").collect::<Vec<_>>().join("[..]");
+ .split('/').collect::<Vec<_>>().join("[..]");
assert_that(p.cargo_process(command)
.arg("--manifest-path").arg(manifest_path_argument)
#[test]
fn list_command_looks_at_path() {
let proj = project("list-non-overlapping");
- let proj = fake_file(proj, &Path::new("path-test"), "cargo-1", FakeKind::Executable);
+ let proj = fake_file(proj, Path::new("path-test"), "cargo-1", FakeKind::Executable);
let mut pr = cargo_process();
let mut path = path();
use cargotest::support::cargo_dir;
let proj = project("list-non-overlapping");
- let proj = fake_file(proj, &Path::new("path-test"), "cargo-2",
+ let proj = fake_file(proj, Path::new("path-test"), "cargo-2",
FakeKind::Symlink{target:&cargo_dir().join("cargo")});
let mut pr = cargo_process();
.publish();
let p = project("a")
- .file("Cargo.toml", &r#"
+ .file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
#[test]
fn bad_target_spec() {
let p = project("a")
- .file("Cargo.toml", &r#"
+ .file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
#[test]
fn bad_target_spec2() {
let p = project("a")
- .file("Cargo.toml", &r#"
+ .file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
let repo = git2::Repository::open(&git_project.root()).unwrap();
let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(),
- &Path::new("src"));
+ Path::new("src"));
git::commit(&repo);
let project = project
let repo = git2::Repository::open(&git1.root()).unwrap();
let url = path2url(git2.root()).to_string();
- git::add_submodule(&repo, &url, &Path::new("a/submodule"));
+ git::add_submodule(&repo, &url, Path::new("a/submodule"));
git::commit(&repo);
git2::Repository::init(&project.root()).unwrap();
fn cargo_process(s: &str) -> ProcessBuilder {
let mut p = cargotest::process(&cargo_dir().join("cargo"));
p.arg(s).cwd(&paths::root()).env("HOME", &paths::home());
- return p;
+ p
}
#[test]
#[test]
fn both_lib_and_bin() {
let td = TempDir::new("cargo").unwrap();
- assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path().clone())
+ assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path())
.env("USER", "foo"),
execs().with_status(101).with_stderr(
"[ERROR] can't specify both lib and binary outputs"));
fn no_filename() {
assert_that(cargo_process("init").arg("/"),
execs().with_status(101)
- .with_stderr(&format!("\
+ .with_stderr("\
[ERROR] cannot auto-detect project name from path \"/\" ; use --name to override
-")));
+".to_string()));
}
fn cargo_process(s: &str) -> ProcessBuilder {
let mut p = cargotest::cargo_process();
p.arg(s);
- return p
+ p
}
fn pkg(name: &str, vers: &str) {
fn cargo_process(s: &str) -> ProcessBuilder {
let mut p = cargotest::cargo_process();
p.arg(s);
- return p;
+ p
}
#[test]
#[test]
fn both_lib_and_bin() {
let td = TempDir::new("cargo").unwrap();
- assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo").cwd(td.path().clone())
+ assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo").cwd(td.path())
.env("USER", "foo"),
execs().with_status(101).with_stderr(
"[ERROR] can't specify both lib and binary outputs"));
#[test]
fn simple_git() {
let td = TempDir::new("cargo").unwrap();
- assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path().clone())
+ assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path())
.env("USER", "foo"),
execs().with_status(0));
assert_that(&td.path().join("foo/.git"), existing_dir());
assert_that(&td.path().join("foo/.gitignore"), existing_file());
- assert_that(cargo_process("build").cwd(&td.path().clone().join("foo")),
+ assert_that(cargo_process("build").cwd(&td.path().join("foo")),
execs().with_status(0));
}
// the hierarchy
let td = TempDir::new("cargo").unwrap();
assert_that(cargo_process("new").arg("foo").env("USER", "foo")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
// the hierarchy
let td = TempDir::new("cargo").unwrap();
assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\"")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
assert_that(cargo_process("new").arg("foo")
.env_remove("USER")
.env("USERNAME", "foo")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
.env("EMAIL", "baz2")
.env("CARGO_NAME", "bar")
.env("CARGO_EMAIL", "baz")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
assert_that(cargo_process("new").arg("foo")
.env("USER", "bar")
.env("EMAIL", "baz")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
assert_that(cargo_process("new").arg("foo")
.env("GIT_AUTHOR_NAME", "foo")
.env("GIT_AUTHOR_EMAIL", "gitfoo")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
assert_that(cargo_process("new").arg("foo")
.env_remove("USER")
.env("GIT_COMMITTER_NAME", "gitfoo")
- .cwd(td.path().clone()),
+ .cwd(td.path()),
execs().with_status(0));
let toml = td.path().join("foo/Cargo.toml");
fn main() {}
"#);
p.build();
- File::create(p.root().join("src/main.rs")).unwrap().write_all(r#"
+ File::create(p.root().join("src/main.rs")).unwrap().write_all(br#"
fn main() { println!("A change!"); }
- "#.as_bytes()).unwrap();
+ "#).unwrap();
let mut cargo = cargo_process();
cargo.cwd(p.root());
assert_that(cargo.clone().arg("build"), execs().with_status(0));
panic!("could not create file {}: {}", p.root().join("src/foo.rs").display(), e)
});
- file.write_all(r#"
+ file.write_all(br#"
fn main() { println!("foo"); }
- "#.as_bytes()).unwrap();
+ "#).unwrap();
std::mem::drop(file);
let mut pro = process(&cargo_dir().join("cargo"));
fn setup() {
let config = paths::root().join(".cargo/config");
t!(fs::create_dir_all(config.parent().unwrap()));
- t!(t!(File::create(&config)).write_all(&format!(r#"
+ t!(t!(File::create(&config)).write_all(br#"
[registry]
token = "api-token"
- "#).as_bytes()));
+ "#));
t!(fs::create_dir_all(&upload_path().join("api/v1/crates")));
repo(®istry_path())
-> CargoResult<Vec<PackageId>> {
let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();
let method = Method::Everything;
- Ok(resolver::resolve(&[(summary, method)], &[], registry)?.iter().map(|p| {
- p.clone()
- }).collect())
+ Ok(resolver::resolve(&[(summary, method)], &[], registry)?.iter().cloned().collect())
}
trait ToDep {
#[bench] fn run1(_ben: &mut test::Bencher) { }"#);
p.build();
- let ref host = rustc_host();
+ let host = &rustc_host();
// Use RUSTFLAGS to pass an argument that will generate an error
assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")